In [1]:
    
# simple lti routines in this module
include("lti.jl")
using LTI
# plotting module
using PyPlot
    
    
Dynamics matrix, $A$, is $N$ by $N$ with $K$ slow modes and $N - K$ fast modes. $A$ is generated in one of two ways:
`A, _, _ = genDyn(N, K, dFast, dSlowLow, dSlowDiff)`, in which case $A$ will have $K$ slow eigenvalues drawn uniformly at random between dSlowLow and dSlowLow + dSlowDiff, and $N - K$ eigenvalues at dFast.
`A, _, _ = genDynRot(N, K, dFast, dSlowLow, dSlowDiff, omegaScale)`, in which case $A$ will have $K / 2$ slow eigenvalues (together with their complex conjugates) whose magnitudes are drawn uniformly at random between dSlowLow and dSlowLow + dSlowDiff, with phases drawn uniformly at random between -omegaScale and omegaScale. There are then $N - K$ eigenvalues at dFast.
Observation matrix, $C$, is $M$ by $N$, and is generated by sampling $M$ random rows of the identity matrix.
$\Pi$ is the steady state covariance matrix satisfying $A\Pi A^T + Q = \Pi$
The system's state $x_t$ evolves according to, $$x_{t + 1} = Ax_t + \xi_t,\text{ with }\xi_t \sim \mathcal{N}(0, Q) \\ y_{t} = Cx_{t} + \eta_t,\text{ with }\eta_t \sim \mathcal{N}(0, R) \\ x_1 \sim \mathcal{N}(0, \Pi)$$
In [2]:
    
# parameters
# K: number of slow modes; N: state dimension; dly: Hankel delay depth
# (NOTE(review): this global `dly` is not referenced by the cells below);
# r: observation-noise variance (0.0 => noiseless observations)
K = 8; N = 100; dly = 3; r = 0.0;
# eigenvalue placement: fast magnitude, slow-band lower edge and width,
# and maximum rotation angle for the complex slow modes of genDynRot
dFast = 0.1; dSlowLow = 0.69; dSlowDiff = 0.3; omegaScale = pi / 4;
# input and observational noise
Q = eye(N) * 1.0;
# dynamics as well as its eigen-decomposition
A, U, D = genDynRot(N, K, dFast, dSlowLow, dSlowDiff, omegaScale);
# A = [0.912 0.129 0.0308 -0.0028; -0.1064 0.9521 0.1709 -0.1174; -0.0692 -0.1469 0.9258 -0.0659; 0.0384 0.1269 0.0210 0.9125];
# D, U = eig(A)
# stationary covariance and its sqrt for the states
# P solves the discrete Lyapunov equation A*P*A' + Q = P (see markdown above)
P = dlyap(A, Q); p = real(sqrtm(P));
# one Kalman-Ho trial
# Run one Kalman-Ho identification trial.
#
# Samples a fresh random M-row observation matrix, simulates the LTI system
# for T steps, fits a dynamics matrix with the Ho-Kalman algorithm, and
# scores the recovered eigenvalues against the leading true ones.
#
# Arguments:
#   M      - number of observed coordinates (rows of C)
#   T      - number of simulation time steps (rounded via int)
#   guessK - assumed number of slow modes passed to hoKalman (default: true K)
#   dly    - Hankel delay depth used by hoKalman (default 3; previously
#            hard-coded even though a global `dly = 3` exists)
#
# Returns (Dp, Ep): eigenvalues of the estimated dynamics, and their
# spectral error against the first guessK true eigenvalues D[1:guessK].
function trial(M, T; guessK=K, dly=3)
    C = genObsSamp(N, M); R = eye(M) * r;
    _, Y = runLTI(A, C, Q, R, int(T), P=P);
    Ap, _ = hoKalman(Y, dly, guessK);
    Dp = eig(Ap)[1];
    Ep = specErr(Dp, D[1:guessK]);
    return Dp, Ep;
end
    
    Out[2]:
In [3]:
    
# Spectra used for estimating the number of slow modes:
# (a) singular values of the delay-Hankel matrix built from lagged output
#     cross-covariances, and
# (b) eigenvalues of the zero-lag output covariance,
# for a freshly sampled M-row observation matrix and a T-step simulation.
#
# Returns (s, d): Hankel singular values and covariance eigenvalues.
function hankelSCovD(T, M; dly=5)
    C = genObsSamp(N, M)
    R = eye(M) * r
    _, Y = runLTI(A, C, Q, R, int(T), P=P)

    # singular spectrum of the Hankel matrix of lagged covariances
    hankelMat = hankel(offset -> xCov(Y, offset), dly)
    hankelSVals = svd(hankelMat)[2]

    # eigen-spectrum of the instantaneous (lag-0) covariance
    covEVals = eig(xCov(Y, 0))[1]

    return hankelSVals, covEVals
end
    
    Out[3]:
In [4]:
    
# Sweep over observation dimension M and trial length T, estimating the
# number of slow modes from (a) the pooled Hankel singular spectrum and
# (b) the pooled zero-lag covariance eigen-spectrum, over nTrial repeats.
nTrial = 50;
Ts = logspace(log10(20), log10(5000), 25);  # trial lengths, log-spaced
Ms = 4:4:N;                                 # observed dimensions
KpHankel = zeros(length(Ms), length(Ts));   # K estimates from Hankel SVs
KpCov = zeros(length(Ms), length(Ts));      # K estimates from cov eigenvalues
for (kM, M) in enumerate(Ms)
    for (kT, T) in enumerate(Ts)
        # pooled spectra across trials
        # NOTE(review): under old-Julia soft scope this assignment likely
        # clobbers the global `D` (true eigenvalues) used by trial() — verify
        S, D = Float64[], Float64[];
        for kTrial in 1:nTrial
            s, d = hankelSCovD(int(T), M)
            S = [S, s]; D = [D, d];  # old-Julia bracket concatenation (vcat)
        end
        sort!(S, rev=true); sort!(D, rev=true);
        # indices of the largest drops (most negative diffs) mark spectral gaps
        dSIx = sortperm(diff(S)); dDIx = sortperm(diff(D));
        # each trial contributes one copy of the spectrum, hence / nTrial
        KpHankel[kM, kT] = int(maximum(dSIx[1:nTrial]) / nTrial);
        KpCov[kM, kT] = int(maximum(dDIx[1:nTrial]) / nTrial);
    end
end
    
In [5]:
    
# sanity check: should be (length(Ms), length(Ts))
size(KpHankel)
    
    Out[5]:
In [6]:
    
# map over (M, T) of where the Hankel-based estimate recovers the true K
imshow(KpHankel.==K, interpolation="nearest", aspect="auto")
    
    
    Out[6]:
In [104]:
    
# map over (M, T) of where the covariance-based estimate recovers the true K
imshow(KpCov .== K, interpolation="nearest", aspect="auto")
    
    Out[104]:
In [7]:
    
# NOTE(review): `DM0` is not defined in this file — presumably pooled
# covariance eigenvalues from earlier notebook state; confirm before re-running
plot(1:K * nTrial * 5, sort(DM0, rev=true)[1:K * nTrial * 5], "k.-", linewidth=2)
axvline(x=K * nTrial, color="red", linewidth=2)  # expected spectral gap at K*nTrial
xlabel("Index"); ylabel("CovarianceEigenvalue")
    
    
In [82]:
    
# same gap heuristic as the sweep above: largest-drop index / nTrial
# NOTE(review): `Sh` is not defined in this file — earlier notebook state?
maximum(sortperm(diff(sort(Sh, rev=true)))[1:nTrial]) / nTrial
    
    Out[82]:
In [79]:
    
# NOTE(review): Gadfly-style call (x=, y=, Geom.line) but only PyPlot is
# loaded above — requires `using Gadfly`; `Sh` is also undefined in this file
plot(y=diff(sort(Sh, rev=true))[1:K * nTrial * 5], x=1:K * nTrial * 5, Geom.line)
    
    Out[79]:
In [ ]:
    
# NOTE(review): incomplete cell — stray `function` keyword with no name/body
function
    
In [4]:
    
# NOTE(review): Gadfly API (layer/Geom/Guide, set_default_plot_size) —
# requires `using Gadfly`; `eigData` is not defined in this file
set_default_plot_size(12cm, 8cm)
plot(layer(x=eigData, Geom.histogram),
     # red vertical reference lines at 1 ./ (1 - D.^2) — TODO(review):
     # confirm the intended reference quantity for these markers
     layer(xintercept=1 ./ (1 - D.^2), Geom.vline(color="red")),
     Guide.xlabel("Eigenvalues"), Guide.ylabel("Count"))
    
    Out[4]:
In [6]:
    
# histogram of eigenvalue errors (Gadfly API; `eigError` undefined here)
plot(x=eigError, Geom.histogram, Guide.xlabel("Eigenvalues"), Guide.ylabel("Count"))
    
    Out[6]:
In [ ]:
    
# Empirical distribution of the largest eigenvalue of the deviation between
# the sample lagged covariance of Y and its exact value A^dt * P, per trial.
# NOTE(review): `C`, `R`, and `dt` are not defined in this file — they must
# come from earlier notebook state; confirm before re-running.
nTrial = 200;
nT = 3;
Ts = logspace(2, 5, nT)
eigDataMax = {}    # NOTE(review): allocated but never filled below
eigErrorMax = {}   # old-Julia Any[] literal; one Float64 vector per T
for(kT, T) in enumerate(Ts)
    tmp = Float64[]
    for ktrial in 1:nTrial
        print(ktrial)
        
        Y, X = runLTI(A, C, Q, R, int(T), P=P);
        # surrogate with independent samples of matching marginal covariance
        # NOTE(review): YInd / MtInd are computed but never used below
        YInd = C * p * randn(N, int(T))
        MtInd = xCov(YInd, dt)
        MtData = xCov(Y, dt);
        MtExact = A^dt * P;
        # deviations
        dMtData = MtData - MtExact;
        # compute eigenvalues
        eError, _ = eig(dMtData)
        tmp = [tmp, maximum(eError)]
    end
    push!(eigErrorMax, tmp)
end
    
In [ ]:
    
# Overlay the max-eigenvalue distributions as line plots, one layer per
# trial length T (Gadfly API — `Layer`/`layer`/`Geom` need `using Gadfly`)
layers = Layer[];
for (kT, T) in enumerate(Ts)
    e, v = hist(eigErrorMax[kT], 100)            # bin edges and counts
    e = (e[2:end] + e[1:end - 1]) / 2.0          # edges -> bin centers
    layers = [layers layer(x=e, y=v, Geom.line)] # hcat-append the new layer
end
plot(layers, Guide.xlabel("Max eigenvalue"), Guide.ylabel("Count"))